Rete Neurale

Caricamento delle librerie

In [1]:
import os
import pandas as pd
from numpy import loadtxt
import numpy as np
import datetime
import tensorflow as tf

import matplotlib.pyplot as plt
from sklearn.preprocessing import MinMaxScaler
import time

# For LSTM model
from keras.models import Sequential
from keras.layers import Dense
from keras.layers import LSTM
from keras.layers import Dropout
from keras.callbacks import EarlyStopping
from keras.models import load_model

#from ann_visualizer.visualize import ann_viz
from keras.utils.vis_utils import plot_model

import plotly.graph_objects as go
import plotly.express as px
import plotly.io as pio

# Themes
# "seaborn", 'ggplot2', 'seaborn', 'simple_white', 'plotly', 'plotly_white', 'plotly_dark', 'presentation', 'xgridoff', 'ygridoff', 'gridon', 'none'
pio.templates.default = "presentation" 

from plotly.subplots import make_subplots
Using TensorFlow backend.

Seleziona cartelle e carica file

In [2]:
# select data folder_dati
glob_path = os.path.normpath(os.getcwd() + os.sep + os.pardir)
folder_dati = glob_path+R"//dati"
folder_risultati = glob_path+R"//risultati"
In [3]:
files = os.listdir(folder_risultati)
files[0:]
Out[3]:
['df_hour.csv',
 'shampoo.csv',
 'df_grouped_date.csv',
 'df_grouped_date_risorsa.csv',
 'df_grouped_hour.csv',
 'export.csv',
 'df_grouped_date_cama_nosunday.csv',
 'export.xlsx',
 'df_min_ciclo_nosunday_no0.csv',
 'df_grouped_date_nosunday.csv']
In [4]:
# Load df_grouped_date, original file
df = pd.read_csv(folder_risultati+"/df_grouped_date_nosunday.csv", sep = ",")
df2 = pd.read_csv(folder_risultati+"/df_min_ciclo_nosunday_no0.csv", sep = ",")
df = df2
In [5]:
# substitute null values with 0
df['Minuti ciclo'] = df['Minuti ciclo'].fillna(0)
#df['Minuti NO ciclo'] = df['Minuti NO ciclo'].fillna(0)
#df['Efficienza'] = df['Efficienza'].fillna(0)

# convert Date 
df['Date'] = pd.to_datetime(df["Date"], format='%Y-%m-%d')

# convert minuti ciclo, no ciclo, efficienza to double
df['Minuti ciclo'] = df['Minuti ciclo'].replace(',', '.', regex=True).astype(float)
#df['Minuti NO ciclo'] = df['Minuti NO ciclo'].replace(',', '.', regex=True).astype(float)
#df['Efficienza'] = df['Efficienza'].replace(',', '.', regex=True).astype(float)
In [6]:
df
Out[6]:
Date Minuti ciclo
0 2019-01-01 1199.102987
1 2019-01-02 1199.102987
2 2019-01-03 1199.102987
3 2019-01-04 1199.102987
4 2019-01-05 1199.102987
... ... ...
486 2020-07-21 1881.433333
487 2020-07-22 2074.900000
488 2020-07-23 2184.450000
489 2020-07-24 2052.766667
490 2020-07-25 775.850000

491 rows × 2 columns

In [7]:
# plot general efficiency
fig = px.line(df, x = "Date", y = "Minuti ciclo", 
              title = "Efficienza / giorno Totale delle macchine")
fig.show()

Seleziona train e test

In [8]:
train = df[:313]

# Scale features
s1 = MinMaxScaler(feature_range=(-1,1))
Xs = s1.fit_transform(train[['Minuti ciclo']])

# Scale predicted value
s2 = MinMaxScaler(feature_range=(-1,1))
Ys = s2.fit_transform(train[['Minuti ciclo']])

Usa una finestra di osservazioni di 7 giorni per far girare la rete neurale

In [9]:
# Each time step uses last 'window' to predict the next change
window = 6
X = []
Y = []
for i in range(window,len(Xs)):
    X.append(Xs[i-window:i,:])
    Y.append(Ys[i])

# Reshape data to format accepted by LSTM
X, Y = np.array(X), np.array(Y)

Crea il modello e lo alleni

In [42]:
# create and train LSTM model

# Initialize LSTM model
model1 = Sequential()

model1.add(LSTM(units=50, return_sequences=True, \
          input_shape=(X.shape[1],X.shape[2])))
model1.add(Dropout(0.2))
model1.add(LSTM(units=50, return_sequences=True))
model1.add(Dropout(0.2))
model1.add(LSTM(units=50))
model1.add(Dropout(0.2))
model1.add(Dense(units=1))
model1.compile(optimizer = 'adam', loss = 'mean_squared_error',\
              metrics = ['accuracy'])
In [43]:
# Allow for early exit
es = EarlyStopping(monitor='loss',mode='min',verbose=1,patience=10)

# Fit (and time) LSTM model
t0 = time.time()
history = model1.fit(X, Y, epochs = 20, batch_size = 250, callbacks=[es], verbose=1)
t1 = time.time()
print('Runtime: %.2f s' %(t1-t0))
Epoch 1/20
307/307 [==============================] - 8s 27ms/step - loss: 0.1842 - accuracy: 0.0000e+00
Epoch 2/20
307/307 [==============================] - 0s 883us/step - loss: 0.1737 - accuracy: 0.0000e+00
Epoch 3/20
307/307 [==============================] - 0s 1ms/step - loss: 0.1601 - accuracy: 0.0000e+00
Epoch 4/20
307/307 [==============================] - 0s 1ms/step - loss: 0.1507 - accuracy: 0.0000e+00
Epoch 5/20
307/307 [==============================] - 0s 861us/step - loss: 0.1426 - accuracy: 0.0000e+00
Epoch 6/20
307/307 [==============================] - 0s 1ms/step - loss: 0.1379 - accuracy: 0.0000e+00
Epoch 7/20
307/307 [==============================] - 0s 912us/step - loss: 0.1312 - accuracy: 0.0000e+00
Epoch 8/20
307/307 [==============================] - 0s 883us/step - loss: 0.1208 - accuracy: 0.0000e+00
Epoch 9/20
307/307 [==============================] - 0s 990us/step - loss: 0.1182 - accuracy: 0.0000e+00
Epoch 10/20
307/307 [==============================] - 0s 878us/step - loss: 0.1233 - accuracy: 0.0000e+00
Epoch 11/20
307/307 [==============================] - 0s 870us/step - loss: 0.1215 - accuracy: 0.0000e+00
Epoch 12/20
307/307 [==============================] - 0s 894us/step - loss: 0.1199 - accuracy: 0.0000e+00
Epoch 13/20
307/307 [==============================] - 0s 1ms/step - loss: 0.1165 - accuracy: 0.0000e+00
Epoch 14/20
307/307 [==============================] - 0s 979us/step - loss: 0.1152 - accuracy: 0.0000e+00
Epoch 15/20
307/307 [==============================] - 0s 988us/step - loss: 0.1151 - accuracy: 0.0000e+00
Epoch 16/20
307/307 [==============================] - 0s 1ms/step - loss: 0.1180 - accuracy: 0.0000e+00
Epoch 17/20
307/307 [==============================] - 0s 943us/step - loss: 0.1165 - accuracy: 0.0000e+00
Epoch 18/20
307/307 [==============================] - 0s 871us/step - loss: 0.1178 - accuracy: 0.0000e+00
Epoch 19/20
307/307 [==============================] - 0s 952us/step - loss: 0.1145 - accuracy: 0.0033
Epoch 20/20
307/307 [==============================] - 0s 1ms/step - loss: 0.1138 - accuracy: 0.0000e+00
Runtime: 18.39 s
In [44]:
# Plot loss
plt.figure(figsize=(20,4))
plt.semilogy(history.history['loss'])
plt.xlabel('epoch'); plt.ylabel('loss')

# Verify the fit of the model
Yp = model1.predict(X)
In [45]:
# un-scale outputs
Yu = s2.inverse_transform(Yp)
Ym = s2.inverse_transform(Y)

plt.figure(figsize=(20,6))
plt.subplot(1,1,1)
plt.plot(train['Date'][window:],Yu,'r-',label='LSTM')
plt.plot(train['Date'][window:],Ym,'k--',label='Real')
plt.ylabel('Minuti ciclo')
plt.legend()
Out[45]:
<matplotlib.legend.Legend at 0x1a52011810>
In [47]:
# RMSE on train
from sklearn.metrics import mean_squared_error
from math import sqrt

rmse1_train = sqrt(mean_squared_error(Ym, Yu))
rmse1_train
Out[47]:
367.50454307753205
In [48]:
# Load model
v = model1

# Load testing data
test = df[313:497]

Xt = test[['Minuti ciclo']].values
Yt = test[['Minuti ciclo']].values

Xts = s1.transform(Xt)
Yts = s2.transform(Yt)

Xti = []
Yti = []
for i in range(window,len(Xts)):
    Xti.append(Xts[i-window:i,:])
    Yti.append(Yts[i])

# Reshape data to format accepted by LSTM
Xti, Yti = np.array(Xti), np.array(Yti)

# Verify the fit of the model
Ytp = model.predict(Xti)
In [49]:
# un-scale outputs
Ytu = s2.inverse_transform(Ytp) # predicted
Ytm = s2.inverse_transform(Yti) # real

plt.figure(figsize=(20,6))
plt.subplot(1,1,1)
plt.plot(test['Date'][window:],Ytu,'r-',label='LSTM Predicted')
plt.plot(test['Date'][window:],Ytm,'k--',label='Real')
plt.legend()
plt.ylabel('Minuti ciclo')
Out[49]:
Text(0, 0.5, 'Minuti ciclo')
In [50]:
Ytu
Out[50]:
array([[1125.9178],
       [1096.2533],
       [1072.2775],
       [1319.4241],
       [1419.3767],
       [1557.2017],
       [1565.6034],
       [1506.4086],
       [1372.3696],
       [1185.5491],
       [1513.005 ],
       [1600.627 ],
       [1574.136 ],
       [1452.567 ],
       [1241.4906],
       [1122.9775],
       [1549.6477],
       [1511.6792],
       [1515.3109],
       [1406.0402],
       [1421.8813],
       [1293.042 ],
       [1834.1118],
       [1988.4983],
       [2079.2297],
       [1840.4978],
       [1706.9658],
       [1476.6826],
       [1969.8573],
       [1897.5212],
       [1929.8744],
       [1867.6793],
       [1699.5088],
       [1522.9286],
       [1989.6483],
       [1843.7168],
       [1667.0752],
       [1618.5016],
       [1469.5311],
       [1304.6122],
       [1858.7566],
       [1812.3206],
       [1839.7657],
       [1715.2526],
       [1588.31  ],
       [1228.1305],
       [1720.0905],
       [1656.1368],
       [1479.3445],
       [1344.731 ],
       [1420.0625],
       [1443.418 ],
       [1900.3588],
       [1885.663 ],
       [1718.4115],
       [1511.0802],
       [1438.6393],
       [1194.4292],
       [1587.412 ],
       [1832.0758],
       [1836.2056],
       [1849.5345],
       [1795.0925],
       [1694.9504],
       [1944.0977],
       [1712.9844],
       [1317.6532],
       [1196.9033],
       [1196.9033],
       [1196.9033],
       [1196.9033],
       [1196.9033],
       [1196.9033],
       [1204.869 ],
       [1229.6251],
       [1280.0181],
       [1347.0005],
       [1435.0789],
       [1550.4595],
       [1360.5713],
       [1154.5947],
       [ 982.0527],
       [1493.9695],
       [1557.7667],
       [1598.5305],
       [1580.181 ],
       [1515.1531],
       [1371.4438],
       [1504.9156],
       [1686.3088],
       [1651.0359],
       [1495.1051],
       [1340.8818],
       [1152.8389],
       [1489.479 ],
       [1435.2053],
       [1550.7029],
       [1590.2261],
       [1389.5804],
       [1448.7574],
       [1616.6342],
       [1677.6078],
       [1679.7239],
       [1576.3127],
       [1461.278 ],
       [1665.6312],
       [1924.6158],
       [1849.3147],
       [1734.4421],
       [1502.2568],
       [1419.2747],
       [1438.8356],
       [1909.9379],
       [1873.7396],
       [1777.7166],
       [1745.8928],
       [1538.3005],
       [1146.0996],
       [1479.5415],
       [1555.7169],
       [1875.6731],
       [1831.2335],
       [1771.6102],
       [1578.0594],
       [2190.6907],
       [2121.5698],
       [2035.9257],
       [1888.2407],
       [1789.895 ],
       [1484.5071],
       [1888.81  ],
       [1701.6688],
       [1982.3624],
       [2004.5381],
       [1833.9606],
       [1370.6334],
       [1935.8881],
       [1866.7399],
       [1762.4645],
       [1719.8971],
       [1546.2249],
       [1432.0228],
       [1890.0074],
       [2023.722 ],
       [1868.5782],
       [1703.636 ],
       [1539.1775],
       [1516.3032],
       [2011.4777],
       [2110.6062],
       [1973.2438],
       [1861.4395],
       [1689.9591],
       [1505.4238],
       [2006.8037],
       [2083.0571],
       [1948.0144],
       [1619.1725],
       [1376.4757],
       [1219.4603],
       [1620.1367],
       [1763.0643],
       [1869.7933],
       [1530.6456],
       [1544.5936],
       [1471.1465],
       [1973.0304],
       [2196.1216],
       [2185.4172],
       [1947.6499],
       [1529.5929],
       [1370.2351]], dtype=float32)

Calcola indice RMSE

In [51]:
from sklearn.metrics import mean_squared_error
from math import sqrt
pred = Ytu[:201]
real = Ytm[:201]

rmse1_test = sqrt(mean_squared_error(real, pred))
rmse1_test
Out[51]:
484.74732633053304
In [52]:
plot_model(model, to_file='model_plot.png', show_shapes=True, show_layer_names=True)
Out[52]:
In [53]:
# second structure

# Initialize LSTM model
model2 = Sequential()

model2.add(LSTM(units=50, return_sequences=True, \
          input_shape=(X.shape[1],X.shape[2])))
model2.add(Dropout(0.2))
model2.add(LSTM(units=30))
model2.add(Dropout(0.2))
model2.add(Dense(units=1))
model2.compile(optimizer = 'adam', loss = 'mean_squared_error',\
              metrics = ['accuracy'])
In [54]:
# Allow for early exit
es = EarlyStopping(monitor='loss',mode='min',verbose=1,patience=10)

# Fit (and time) LSTM model
t0 = time.time()
history = model2.fit(X, Y, epochs = 20, batch_size = 250, callbacks=[es], verbose=1)
t1 = time.time()
print('Runtime: %.2f s' %(t1-t0))
Epoch 1/20
307/307 [==============================] - 5s 17ms/step - loss: 0.2041 - accuracy: 0.0000e+00
Epoch 2/20
307/307 [==============================] - 0s 518us/step - loss: 0.1892 - accuracy: 0.0000e+00
Epoch 3/20
307/307 [==============================] - 0s 629us/step - loss: 0.1736 - accuracy: 0.0000e+00
Epoch 4/20
307/307 [==============================] - 0s 637us/step - loss: 0.1600 - accuracy: 0.0000e+00
Epoch 5/20
307/307 [==============================] - 0s 642us/step - loss: 0.1495 - accuracy: 0.0000e+00
Epoch 6/20
307/307 [==============================] - 0s 972us/step - loss: 0.1431 - accuracy: 0.0000e+00
Epoch 7/20
307/307 [==============================] - 0s 1ms/step - loss: 0.1372 - accuracy: 0.0000e+00
Epoch 8/20
307/307 [==============================] - 0s 678us/step - loss: 0.1319 - accuracy: 0.0000e+00
Epoch 9/20
307/307 [==============================] - 0s 523us/step - loss: 0.1248 - accuracy: 0.0000e+00
Epoch 10/20
307/307 [==============================] - 0s 677us/step - loss: 0.1239 - accuracy: 0.0000e+00
Epoch 11/20
307/307 [==============================] - 0s 732us/step - loss: 0.1222 - accuracy: 0.0000e+00
Epoch 12/20
307/307 [==============================] - 0s 617us/step - loss: 0.1224 - accuracy: 0.0000e+00
Epoch 13/20
307/307 [==============================] - 0s 585us/step - loss: 0.1243 - accuracy: 0.0000e+00
Epoch 14/20
307/307 [==============================] - 0s 585us/step - loss: 0.1255 - accuracy: 0.0000e+00
Epoch 15/20
307/307 [==============================] - 0s 570us/step - loss: 0.1249 - accuracy: 0.0000e+00
Epoch 16/20
307/307 [==============================] - 0s 551us/step - loss: 0.1210 - accuracy: 0.0000e+00
Epoch 17/20
307/307 [==============================] - 0s 645us/step - loss: 0.1193 - accuracy: 0.0000e+00
Epoch 18/20
307/307 [==============================] - 0s 583us/step - loss: 0.1192 - accuracy: 0.0000e+00
Epoch 19/20
307/307 [==============================] - 0s 589us/step - loss: 0.1194 - accuracy: 0.0000e+00
Epoch 20/20
307/307 [==============================] - 0s 587us/step - loss: 0.1191 - accuracy: 0.0000e+00
Runtime: 12.54 s
In [55]:
# Plot loss
plt.figure(figsize=(20,4))
plt.semilogy(history.history['loss'])
plt.xlabel('epoch'); plt.ylabel('loss')

# Verify the fit of the model
Yp = model2.predict(X)
In [56]:
# un-scale outputs
Yu = s2.inverse_transform(Yp)
Ym = s2.inverse_transform(Y)

plt.figure(figsize=(20,6))
plt.subplot(1,1,1)
plt.plot(train['Date'][window:],Yu,'r-',label='LSTM')
plt.plot(train['Date'][window:],Ym,'k--',label='Real')
plt.ylabel('Minuti ciclo')
plt.legend()
Out[56]:
<matplotlib.legend.Legend at 0x1a54e5d110>
In [57]:
# RMSE on train
from sklearn.metrics import mean_squared_error
from math import sqrt

rmse2_train = sqrt(mean_squared_error(Ym, Yu))
rmse2_train
Out[57]:
376.8478492015935
In [58]:
# Load model
v = model2

# Load testing data
test = df[313:497]

Xt = test[['Minuti ciclo']].values
Yt = test[['Minuti ciclo']].values

Xts = s1.transform(Xt)
Yts = s2.transform(Yt)

Xti = []
Yti = []
for i in range(window,len(Xts)):
    Xti.append(Xts[i-window:i,:])
    Yti.append(Yts[i])

# Reshape data to format accepted by LSTM
Xti, Yti = np.array(Xti), np.array(Yti)

# Verify the fit of the model
Ytp = model2.predict(Xti)
In [59]:
# un-scale outputs
Ytu = s2.inverse_transform(Ytp) # predicted
Ytm = s2.inverse_transform(Yti) # real

plt.figure(figsize=(20,6))
plt.subplot(1,1,1)
plt.plot(test['Date'][window:],Ytu,'r-',label='LSTM Predicted')
plt.plot(test['Date'][window:],Ytm,'k--',label='Real')
plt.legend()
plt.ylabel('Minuti ciclo')
Out[59]:
Text(0, 0.5, 'Minuti ciclo')
In [60]:
from sklearn.metrics import mean_squared_error
from math import sqrt
pred = Ytu[:201]
real = Ytm[:201]

rmse2_test = sqrt(mean_squared_error(real, pred))
rmse2_test
Out[60]:
542.7180529170251
In [61]:
plot_model(model2, to_file='model_plot.png', show_shapes=True, show_layer_names=True)
Out[61]:
In [82]:
# third structure

# Initialize LSTM model
model3 = Sequential()

model3.add(LSTM(units=50, return_sequences=True, \
          input_shape=(X.shape[1],X.shape[2])))
model3.add(Dropout(0.2))
model3.add(LSTM(units=50, return_sequences=True))
model3.add(Dropout(0.2))
model3.add(LSTM(units=40, return_sequences=True))
model3.add(Dropout(0.2))
model3.add(LSTM(units=40))
model3.add(Dropout(0.2))
model3.add(Dense(units=1))
model3.compile(optimizer = 'adam', loss = 'mean_squared_error',\
              metrics = ['accuracy'])
In [83]:
# Allow for early exit
es = EarlyStopping(monitor='loss',mode='min',verbose=1,patience=10)

# Fit (and time) LSTM model
t0 = time.time()
history = model3.fit(X, Y, epochs = 20, batch_size = 250, callbacks=[es], verbose=1)
t1 = time.time()
print('Runtime: %.2f s' %(t1-t0))
Epoch 1/20
307/307 [==============================] - 15s 48ms/step - loss: 0.1821 - accuracy: 0.0000e+00
Epoch 2/20
307/307 [==============================] - 0s 1ms/step - loss: 0.1751 - accuracy: 0.0000e+00
Epoch 3/20
307/307 [==============================] - 0s 1ms/step - loss: 0.1664 - accuracy: 0.0000e+00
Epoch 4/20
307/307 [==============================] - 0s 1ms/step - loss: 0.1578 - accuracy: 0.0000e+00
Epoch 5/20
307/307 [==============================] - 0s 1ms/step - loss: 0.1518 - accuracy: 0.0000e+00
Epoch 6/20
307/307 [==============================] - 0s 1ms/step - loss: 0.1385 - accuracy: 0.0000e+00
Epoch 7/20
307/307 [==============================] - 0s 1ms/step - loss: 0.1232 - accuracy: 0.0000e+00
Epoch 8/20
307/307 [==============================] - 0s 1ms/step - loss: 0.1126 - accuracy: 0.0000e+00
Epoch 9/20
307/307 [==============================] - 0s 1ms/step - loss: 0.1256 - accuracy: 0.0000e+00
Epoch 10/20
307/307 [==============================] - 0s 1ms/step - loss: 0.1224 - accuracy: 0.0000e+00
Epoch 11/20
307/307 [==============================] - 0s 1ms/step - loss: 0.1180 - accuracy: 0.0000e+00
Epoch 12/20
307/307 [==============================] - 0s 1ms/step - loss: 0.1146 - accuracy: 0.0000e+00
Epoch 13/20
307/307 [==============================] - 0s 1ms/step - loss: 0.1121 - accuracy: 0.0000e+00
Epoch 14/20
307/307 [==============================] - 0s 1ms/step - loss: 0.1151 - accuracy: 0.0000e+00
Epoch 15/20
307/307 [==============================] - 0s 1ms/step - loss: 0.1167 - accuracy: 0.0000e+00
Epoch 16/20
307/307 [==============================] - 0s 1ms/step - loss: 0.1172 - accuracy: 0.0033
Epoch 17/20
307/307 [==============================] - 0s 1ms/step - loss: 0.1127 - accuracy: 0.0000e+00
Epoch 18/20
307/307 [==============================] - 0s 1ms/step - loss: 0.1113 - accuracy: 0.0000e+00
Epoch 19/20
307/307 [==============================] - 0s 1ms/step - loss: 0.1118 - accuracy: 0.0000e+00
Epoch 20/20
307/307 [==============================] - 0s 1ms/step - loss: 0.1099 - accuracy: 0.0000e+00
Runtime: 31.02 s
In [84]:
# Plot loss
plt.figure(figsize=(20,4))
plt.semilogy(history.history['loss'])
plt.xlabel('epoch'); plt.ylabel('loss')

# Verify the fit of the model
Yp = model3.predict(X)
In [85]:
# un-scale outputs
Yu = s2.inverse_transform(Yp)
Ym = s2.inverse_transform(Y)

plt.figure(figsize=(20,6))
plt.subplot(1,1,1)
plt.plot(train['Date'][window:],Yu,'r-',label='LSTM')
plt.plot(train['Date'][window:],Ym,'k--',label='Real')
plt.ylabel('Minuti ciclo')
plt.legend()
Out[85]:
<matplotlib.legend.Legend at 0x1a62775310>
In [86]:
# RMSE on train
from sklearn.metrics import mean_squared_error
from math import sqrt

rmse3_train = sqrt(mean_squared_error(Ym, Yu))
rmse3_train
Out[86]:
359.92569612516826
In [87]:
# Load model
v = model3

# Load testing data
test = df[313:497]

Xt = test[['Minuti ciclo']].values
Yt = test[['Minuti ciclo']].values

Xts = s1.transform(Xt)
Yts = s2.transform(Yt)

Xti = []
Yti = []
for i in range(window,len(Xts)):
    Xti.append(Xts[i-window:i,:])
    Yti.append(Yts[i])

# Reshape data to format accepted by LSTM
Xti, Yti = np.array(Xti), np.array(Yti)

# Verify the fit of the model
Ytp = model3.predict(Xti)
In [88]:
# un-scale outputs
Ytu = s2.inverse_transform(Ytp) # predicted
Ytm = s2.inverse_transform(Yti) # real

plt.figure(figsize=(20,6))
plt.subplot(1,1,1)
plt.plot(test['Date'][window:],Ytu,'r-',label='LSTM Predicted')
plt.plot(test['Date'][window:],Ytm,'k--',label='Real')
plt.legend()
plt.ylabel('Minuti ciclo')
Out[88]:
Text(0, 0.5, 'Minuti ciclo')
In [89]:
from sklearn.metrics import mean_squared_error
from math import sqrt
pred = Ytu[:201]
real = Ytm[:201]

rmse3_test = sqrt(mean_squared_error(real, pred))
rmse3_test
Out[89]:
478.9465875974628
In [90]:
plot_model(model3, to_file='model_plot.png', show_shapes=True, show_layer_names=True)
Out[90]:
In [72]:
# 4 structure

# Initialize LSTM model
model4 = Sequential()

model4.add(LSTM(units=50, return_sequences=True, \
          input_shape=(X.shape[1],X.shape[2])))
model4.add(Dropout(0.2))
model4.add(LSTM(units=50, return_sequences=True))
model4.add(Dropout(0.2))
model4.add(LSTM(units=40, return_sequences=True))
model4.add(Dropout(0.2))
model4.add(LSTM(units=40))
model4.add(Dropout(0.2))
model4.add(Dense(units=1))
model4.compile(optimizer = 'adam', loss = 'mean_squared_error',\
              metrics = ['accuracy'])
In [73]:
# Allow for early exit
es = EarlyStopping(monitor='loss',mode='min',verbose=1,patience=10)

# Fit (and time) LSTM model
t0 = time.time()
history = model4.fit(X, Y, epochs = 50, batch_size = 250, callbacks=[es], verbose=1)
t1 = time.time()
print('Runtime: %.2f s' %(t1-t0))
Epoch 1/50
307/307 [==============================] - 12s 38ms/step - loss: 0.1824 - accuracy: 0.0000e+00
Epoch 2/50
307/307 [==============================] - 0s 1ms/step - loss: 0.1739 - accuracy: 0.0000e+00
Epoch 3/50
307/307 [==============================] - 0s 1ms/step - loss: 0.1647 - accuracy: 0.0000e+00
Epoch 4/50
307/307 [==============================] - 0s 1ms/step - loss: 0.1549 - accuracy: 0.0000e+00
Epoch 5/50
307/307 [==============================] - 0s 1ms/step - loss: 0.1512 - accuracy: 0.0000e+00
Epoch 6/50
307/307 [==============================] - 0s 1ms/step - loss: 0.1412 - accuracy: 0.0000e+00
Epoch 7/50
307/307 [==============================] - 0s 1ms/step - loss: 0.1220 - accuracy: 0.0033
Epoch 8/50
307/307 [==============================] - 0s 1ms/step - loss: 0.1153 - accuracy: 0.0000e+00
Epoch 9/50
307/307 [==============================] - 0s 1ms/step - loss: 0.1183 - accuracy: 0.0000e+00
Epoch 10/50
307/307 [==============================] - 1s 2ms/step - loss: 0.1209 - accuracy: 0.0000e+00
Epoch 11/50
307/307 [==============================] - 1s 2ms/step - loss: 0.1134 - accuracy: 0.0000e+00
Epoch 12/50
307/307 [==============================] - 0s 1ms/step - loss: 0.1150 - accuracy: 0.0000e+00
Epoch 13/50
307/307 [==============================] - 0s 1ms/step - loss: 0.1148 - accuracy: 0.0000e+00
Epoch 14/50
307/307 [==============================] - 0s 1ms/step - loss: 0.1132 - accuracy: 0.0000e+00
Epoch 15/50
307/307 [==============================] - 0s 1ms/step - loss: 0.1129 - accuracy: 0.0000e+00
Epoch 16/50
307/307 [==============================] - 0s 1ms/step - loss: 0.1134 - accuracy: 0.0033
Epoch 17/50
307/307 [==============================] - 0s 1ms/step - loss: 0.1116 - accuracy: 0.0000e+00
Epoch 18/50
307/307 [==============================] - 0s 1ms/step - loss: 0.1096 - accuracy: 0.0000e+00
Epoch 19/50
307/307 [==============================] - 0s 1ms/step - loss: 0.1113 - accuracy: 0.0000e+00
Epoch 20/50
307/307 [==============================] - 0s 1ms/step - loss: 0.1109 - accuracy: 0.0000e+00
Epoch 21/50
307/307 [==============================] - 0s 1ms/step - loss: 0.1089 - accuracy: 0.0000e+00
Epoch 22/50
307/307 [==============================] - 0s 1ms/step - loss: 0.1087 - accuracy: 0.0000e+00
Epoch 23/50
307/307 [==============================] - 0s 1ms/step - loss: 0.1104 - accuracy: 0.0000e+00
Epoch 24/50
307/307 [==============================] - 0s 1ms/step - loss: 0.1085 - accuracy: 0.0000e+00
Epoch 25/50
307/307 [==============================] - 0s 1ms/step - loss: 0.1073 - accuracy: 0.0000e+00
Epoch 26/50
307/307 [==============================] - 0s 1ms/step - loss: 0.1073 - accuracy: 0.0000e+00
Epoch 27/50
307/307 [==============================] - 0s 2ms/step - loss: 0.1069 - accuracy: 0.0000e+00
Epoch 28/50
307/307 [==============================] - 1s 2ms/step - loss: 0.1068 - accuracy: 0.0000e+00
Epoch 29/50
307/307 [==============================] - 0s 1ms/step - loss: 0.1073 - accuracy: 0.0000e+00
Epoch 30/50
307/307 [==============================] - 0s 2ms/step - loss: 0.1076 - accuracy: 0.0000e+00
Epoch 31/50
307/307 [==============================] - 0s 1ms/step - loss: 0.1082 - accuracy: 0.0000e+00
Epoch 32/50
307/307 [==============================] - 0s 1ms/step - loss: 0.1045 - accuracy: 0.0000e+00
Epoch 33/50
307/307 [==============================] - 0s 1ms/step - loss: 0.1097 - accuracy: 0.0000e+00
Epoch 34/50
307/307 [==============================] - 0s 982us/step - loss: 0.1076 - accuracy: 0.0033
Epoch 35/50
307/307 [==============================] - 0s 1ms/step - loss: 0.1042 - accuracy: 0.0000e+00
Epoch 36/50
307/307 [==============================] - 0s 1ms/step - loss: 0.1062 - accuracy: 0.0000e+00
Epoch 37/50
307/307 [==============================] - 1s 2ms/step - loss: 0.1066 - accuracy: 0.0000e+00
Epoch 38/50
307/307 [==============================] - 0s 1ms/step - loss: 0.1042 - accuracy: 0.0033
Epoch 39/50
307/307 [==============================] - 0s 1ms/step - loss: 0.1055 - accuracy: 0.0033
Epoch 40/50
307/307 [==============================] - 0s 1ms/step - loss: 0.1057 - accuracy: 0.0033
Epoch 41/50
307/307 [==============================] - 0s 1ms/step - loss: 0.1081 - accuracy: 0.0033
Epoch 42/50
307/307 [==============================] - 0s 1ms/step - loss: 0.1021 - accuracy: 0.0033
Epoch 43/50
307/307 [==============================] - 0s 1ms/step - loss: 0.1057 - accuracy: 0.0000e+00
Epoch 44/50
307/307 [==============================] - 0s 1ms/step - loss: 0.1067 - accuracy: 0.0000e+00
Epoch 45/50
307/307 [==============================] - 0s 1ms/step - loss: 0.1051 - accuracy: 0.0000e+00
Epoch 46/50
307/307 [==============================] - 0s 1ms/step - loss: 0.1057 - accuracy: 0.0000e+00
Epoch 47/50
307/307 [==============================] - 0s 1ms/step - loss: 0.1002 - accuracy: 0.0033
Epoch 48/50
307/307 [==============================] - 0s 1ms/step - loss: 0.1015 - accuracy: 0.0033
Epoch 49/50
307/307 [==============================] - 0s 1ms/step - loss: 0.1033 - accuracy: 0.0033
Epoch 50/50
307/307 [==============================] - 0s 1ms/step - loss: 0.1033 - accuracy: 0.0000e+00
Runtime: 40.22 s
In [74]:
# Plot loss
plt.figure(figsize=(20,4))
plt.semilogy(history.history['loss'])
plt.xlabel('epoch'); plt.ylabel('loss')

# Verify the fit of the model
Yp = model4.predict(X)
In [75]:
# un-scale outputs
Yu = s2.inverse_transform(Yp)
Ym = s2.inverse_transform(Y)

plt.figure(figsize=(20,6))
plt.subplot(1,1,1)
plt.plot(train['Date'][window:],Yu,'r-',label='LSTM')
plt.plot(train['Date'][window:],Ym,'k--',label='Real')
plt.ylabel('Minuti ciclo')
plt.legend()
Out[75]:
<matplotlib.legend.Legend at 0x1a5db77450>
In [76]:
# RMSE on train
from sklearn.metrics import mean_squared_error
from math import sqrt

rmse4_train = sqrt(mean_squared_error(Ym, Yu))
rmse4_train
Out[76]:
348.93628728667926
In [77]:
# Load model
v = model4

# Load testing data
test = df[313:497]

Xt = test[['Minuti ciclo']].values
Yt = test[['Minuti ciclo']].values

Xts = s1.transform(Xt)
Yts = s2.transform(Yt)

Xti = []
Yti = []
for i in range(window,len(Xts)):
    Xti.append(Xts[i-window:i,:])
    Yti.append(Yts[i])

# Reshape data to format accepted by LSTM
Xti, Yti = np.array(Xti), np.array(Yti)

# Verify the fit of the model
Ytp = model4.predict(Xti)
In [78]:
# un-scale outputs
Ytu = s2.inverse_transform(Ytp) # predicted
Ytm = s2.inverse_transform(Yti) # real

plt.figure(figsize=(20,6))
plt.subplot(1,1,1)
plt.plot(test['Date'][window:],Ytu,'r-',label='LSTM Predicted')
plt.plot(test['Date'][window:],Ytm,'k--',label='Real')
plt.legend()
plt.ylabel('Minuti ciclo')
Out[78]:
Text(0, 0.5, 'Minuti ciclo')
In [79]:
from sklearn.metrics import mean_squared_error
from math import sqrt
pred = Ytu[:201]
real = Ytm[:201]

rmse4_test = sqrt(mean_squared_error(real, pred))
rmse4_test
Out[79]:
467.9096202280408
In [80]:
plot_model(model4, to_file='model_plot.png', show_shapes=True, show_layer_names=True)
Out[80]:
In [91]:
print(rmse1_train, rmse1_test, rmse2_train, rmse2_test, rmse3_train, rmse3_test, rmse4_train, rmse4_test)
367.50454307753205 484.74732633053304 376.8478492015935 542.7180529170251 359.92569612516826 478.9465875974628 348.93628728667926 467.9096202280408
In [ ]: